# (stray page-navigation text from the web listing, commented out so the module parses)
# home *** CD-ROM | disk | FTP | other *** search
- # Source Generated with Decompyle++
- # File: in.pyc (Python 2.6)
-
- import rdflib
- from rdflib.sparql import sparqlGraph, sparqlOperators, SPARQLError
- from rdflib.sparql.sparqlOperators import getValue
- from rdflib.sparql.graphPattern import BasicGraphPattern
- from rdflib.sparql.Unbound import Unbound
- from rdflib.sparql.Query import _variablesToArray, queryObject, SessionBNode
- from rdflib.Graph import ConjunctiveGraph, Graph, BackwardCompatGraph, ReadOnlyGraphAggregate
- from rdflib import URIRef, Variable, BNode, Literal, plugin, RDF
- from rdflib.store import Store
- from rdflib.Literal import XSDToPython
- from IRIRef import NamedGraph, RemoteGraph
- from GraphPattern import ParsedAlternativeGraphPattern, ParsedOptionalGraphPattern
- from Resource import *
- from Triples import ParsedConstrainedTriples
- from QName import *
- from PreProcessor import *
- from Expression import *
- from Util import ListRedirect
- from Operators import *
- from FunctionLibrary import *
- from SolutionModifier import ASCENDING_ORDER
- from Query import AskQuery, SelectQuery
# Module-wide debug flag (Evaluate overrides it per-query via prolog.DEBUG).
DEBUG = False

# Parsed binary-operator class -> sparql-p operator invocation template.
# Slots: (left operand, right operand, optional combination-call suffix).
BinaryOperatorMapping = {
    LessThanOperator:           'sparqlOperators.lt(%s,%s)%s',
    EqualityOperator:           'sparqlOperators.eq(%s,%s)%s',
    NotEqualOperator:           'sparqlOperators.neq(%s,%s)%s',
    LessThanOrEqualOperator:    'sparqlOperators.le(%s,%s)%s',
    GreaterThanOperator:        'sparqlOperators.gt(%s,%s)%s',
    GreaterThanOrEqualOperator: 'sparqlOperators.ge(%s,%s)%s',
}

# Parsed unary-operator class -> Python expression template.
UnaryOperatorMapping = {
    LogicalNegation: 'not(%s)',
    NumericNegative: '-(%s)',
}

# Lower-cased SPARQL builtin name -> camel-cased sparql-p operator name.
CAMEL_CASE_BUILTINS = {
    'isuri':     'sparqlOperators.isURI',
    'isiri':     'sparqlOperators.isIRI',
    'isblank':   'sparqlOperators.isBlank',
    'isliteral': 'sparqlOperators.isLiteral',
}
-
def convertTerm(term, queryProlog):
    '''
    Utility function for converting parsed Triple components into Unbound

    Variables become sparql-p Unbound terms, QNames are expanded against the
    query prolog's prefix/BASE declarations, parsed strings become Literals;
    BNodes and anything already usable pass through unchanged.
    (A dead `isinstance(term, QName)` expression statement left behind by the
    decompiler was removed.)
    '''
    if isinstance(term, Variable):
        # Strip the leading '?'/'$' sigil the parser keeps on variable names.
        return Unbound(term[1:])
    elif isinstance(term, BNode):
        return term
    elif isinstance(term, QName):
        if not term.prefix:
            # Prefix-less QNames resolve against the BASE declaration.
            return URIRef(queryProlog.baseDeclaration + term.localname)
        elif term.prefix == '_':
            # '_:id' syntax: interpreted verbatim as a session-scoped BNode.
            import warnings
            warnings.warn('The verbatim interpretation of explicit bnode identifiers is contrary to (current) DAWG stance', SyntaxWarning)
            return SessionBNode(term.localname)
        else:
            return URIRef(queryProlog.prefixBindings[term.prefix] + term.localname)
    elif isinstance(term, QNamePrefix):
        return URIRef(queryProlog.baseDeclaration + term)
    elif isinstance(term, ParsedString):
        return Literal(term)
    else:
        return term
-
-
def unRollCollection(collection, queryProlog):
    '''
    Generates the rdf:List triples that encode a parsed SPARQL collection,
    then expands any nested complex (RDFTerm) members afterwards.
    '''
    deferred = []
    head = convertTerm(collection.identifier, queryProlog)
    if not collection._list:
        # Empty collection: just terminate the list.
        yield (head, RDF.rest, RDF.nil)
    elif len(collection._list) == 1:
        only = collection._list[0]
        if isinstance(only, RDFTerm):
            deferred.append(only)
            yield (head, RDF.first, convertTerm(only.identifier, queryProlog))
        else:
            yield (head, RDF.first, convertTerm(only, queryProlog))
        yield (head, RDF.rest, RDF.nil)
    else:
        # First member hangs off the collection's own identifier ...
        yield (head, RDF.first, collection._list[0].identifier)
        cursor = head
        # ... and each subsequent member gets a fresh linking BNode.
        for member in collection._list[1:]:
            link = convertTerm(BNode(), queryProlog)
            if isinstance(member, RDFTerm):
                deferred.append(member)
                yield (link, RDF.first, convertTerm(member.identifier, queryProlog))
            else:
                yield (link, RDF.first, convertTerm(member, queryProlog))
            yield (cursor, RDF.rest, link)
            cursor = link
        yield (cursor, RDF.rest, RDF.nil)
    # Expand nested complex terms once the list skeleton has been emitted.
    for nested in deferred:
        for triple in unRollRDFTerm(nested, queryProlog):
            yield triple
-
-
-
def unRollRDFTerm(item, queryProlog):
    '''
    Generates the triples expressed by a parsed RDFTerm (a resource with
    property/value pairs), recursing into nested RDFTerm objects and -- for
    ParsedCollections -- into the collection's rdf:List encoding.
    '''
    deferred = []
    for propVal in item.propVals:
        for obj in propVal.objects:
            if isinstance(obj, RDFTerm):
                # Complex object: reference its identifier now, expand later.
                deferred.append(obj)
                yield (convertTerm(item.identifier, queryProlog),
                       convertTerm(propVal.property, queryProlog),
                       convertTerm(obj.identifier, queryProlog))
            else:
                yield (convertTerm(item.identifier, queryProlog),
                       convertTerm(propVal.property, queryProlog),
                       convertTerm(obj, queryProlog))
    if isinstance(item, ParsedCollection):
        for triple in unRollCollection(item, queryProlog):
            yield triple
    for nested in deferred:
        for triple in unRollRDFTerm(nested, queryProlog):
            yield triple
-
-
def unRollTripleItems(items, queryProlog):
    '''
    Takes a list of Triples (nested lists or ParsedConstrainedTriples)
    and (recursively) returns a generator over all the contained triple patterns

    Fixes two decompiler-introduced defects: the recursive branch yielded the
    loop variable (`item`) instead of the recursion's results, and it ran
    unconditionally -- so RDFTerm items were expanded by unRollRDFTerm AND
    then re-yielded verbatim by the recursion.
    '''
    if isinstance(items, RDFTerm):
        for triple in unRollRDFTerm(items, queryProlog):
            yield triple
    else:
        for item in items:
            if isinstance(item, RDFTerm):
                for triple in unRollRDFTerm(item, queryProlog):
                    yield triple
            else:
                # Nested list (or ParsedConstrainedTriples): recurse.
                for triple in unRollTripleItems(item, queryProlog):
                    yield triple
-
-
def mapToOperator(expr, prolog, combinationArg=None):
    '''
    Reduces certain expressions (operator expressions, function calls, terms,
    and combinator expressions) into strings of their Python equivalent,
    later eval'ed into sparql-p constraint functions.

    NOTE(review): several expressions here are reconstructed from corrupt
    decompiler output (the `','.join`, regex-flags, additive-list and `fUri`
    fragments were mangled); reconstruction follows the rdflib 2.x sparql-p
    processor -- confirm against a clean copy of that module.
    '''
    # Suffix that immediately invokes a generated operator with the bindings
    # variable (named by combinationArg), e.g. '(i)'.
    combinationInvokation = combinationArg and '(%s)' % combinationArg or ''
    if isinstance(expr, ListRedirect):
        # Collapse single-element expression lists to the bare expression.
        expr = expr.reduce()
    if isinstance(expr, UnaryOperator):
        return UnaryOperatorMapping[type(expr)] % (
            mapToOperator(expr.argument, prolog, combinationArg))
    elif isinstance(expr, BinaryOperator):
        return BinaryOperatorMapping[type(expr)] % (
            mapToOperator(expr.left, prolog, combinationArg),
            mapToOperator(expr.right, prolog, combinationArg),
            combinationInvokation)
    elif isinstance(expr, (Variable, Unbound)):
        return '"%s"' % expr
    elif isinstance(expr, ParsedREGEXInvocation):
        # Third argument (regex flags) is optional.
        return 'sparqlOperators.regex(%s,%s%s)%s' % (
            mapToOperator(expr.arg1, prolog, combinationArg),
            mapToOperator(expr.arg2, prolog, combinationArg),
            expr.arg3 and ',"' + expr.arg3 + '"' or '',
            combinationInvokation)
    elif isinstance(expr, BuiltinFunctionCall):
        normBuiltInName = FUNCTION_NAMES[expr.name].lower()
        normBuiltInName = CAMEL_CASE_BUILTINS.get(
            normBuiltInName, 'sparqlOperators.' + normBuiltInName)
        return '%s(%s)%s' % (
            normBuiltInName,
            ','.join([mapToOperator(i, prolog, combinationArg)
                      for i in expr.arguments]),
            combinationInvokation)
    elif isinstance(expr, Literal):
        return str(expr)
    elif isinstance(expr, URIRef):
        import warnings
        warnings.warn('There is the possibility of __repr__ being deprecated in python3K', DeprecationWarning, stacklevel=3)
        return repr(expr)
    elif isinstance(expr, (QName, basestring)):
        return "'%s'" % convertTerm(expr, prolog)
    elif isinstance(expr, ParsedAdditiveExpressionList):
        # Additive expression lists are combined by the addOperator helper
        # into a Literal-producing expression.
        return 'Literal(%s)' % sparqlOperators.addOperator(
            [mapToOperator(item, prolog, combinationArg='i') for item in expr],
            combinationArg)
    elif isinstance(expr, FunctionCall):
        # Extension function call: only XSD casting functions are supported.
        # (The decompile lost this binding, leaving `fUri` undefined.)
        fUri = convertTerm(expr.name, prolog)
        if fUri in XSDToPython:
            return "sparqlOperators.XSDCast(%s,'%s')%s" % (
                mapToOperator(expr.arguments[0], prolog, combinationArg='i'),
                fUri,
                combinationInvokation)
        raise Exception('Whats do i do with %s (a %s)?' % (expr, type(expr).__name__))
    else:
        raise Exception('What do i do with %s (a %s)?' % (expr, type(expr).__name__))
-
-
- def createSPARQLPConstraint(filter, prolog):
- '''
- Takes an instance of either ParsedExpressionFilter or ParsedFunctionFilter
- and converts it to a sparql-p operator by composing a python string of lambda functions and SPARQL operators
- This string is then evaluated to return the actual function for sparql-p
- '''
- if not isinstance(filter.filter, ListRedirect) or filter.filter.reduce():
- pass
- reducedFilter = filter.filter
- if isinstance(reducedFilter, ParsedConditionalAndExpressionList):
- combinationLambda = [] % []([ '%s' % mapToOperator(expr, prolog, combinationArg = 'i') for expr in reducedFilter ])
- if prolog.DEBUG:
- print 'sparql-p operator(s): %s' % combinationLambda
-
- return eval(combinationLambda)
- if isinstance(reducedFilter, ParsedRelationalExpressionList):
- combinationLambda = [] % []([ '%s' % mapToOperator(expr, prolog, combinationArg = 'i') for expr in reducedFilter ])
- return eval(combinationLambda)
- if isinstance(reducedFilter, BuiltinFunctionCall):
- rt = mapToOperator(reducedFilter, prolog)
- if prolog.DEBUG:
- print 'sparql-p operator(s): %s' % rt
-
- return eval(rt)
- if isinstance(reducedFilter, (ParsedAdditiveExpressionList, UnaryOperator, FunctionCall)):
- rt = 'lambda(i): %s' % mapToOperator(reducedFilter, prolog, combinationArg = 'i')
- return eval(rt)
- rt = mapToOperator(reducedFilter, prolog)
- return eval(rt)
-
-
def sparqlPSetup(groupGraphPattern, prolog):
    '''
    This core function takes Where Clause and two lists of
    rdflib.sparql.graphPattern.BasicGraphPatterns (the main patterns -
    connected by UNION - and an optional patterns).
    This is the core SELECT API of sparql-p.

    Returns (basicGraphPatterns, rtOptionalGraphPatterns).
    '''
    basicGraphPatterns = []
    patternList = []
    graphGraphPatterns, optionalGraphPatterns, alternativeGraphPatterns = \
        categorizeGroupGraphPattern(groupGraphPattern)
    globalTPs, globalConstraints = reorderBasicGraphPattern(groupGraphPattern[0])
    if alternativeGraphPatterns:
        # UNION: each branch becomes its own BasicGraphPattern, constrained
        # by its own FILTERs plus the global ones.  (The BasicGraphPattern
        # construction and constraint wiring were mangled by the decompiler
        # and are reconstructed here.)
        for alternativeGPBlock in alternativeGraphPatterns:
            for alternativeGPs in alternativeGPBlock.nonTripleGraphPattern:
                triples, constraints = reorderBasicGraphPattern(alternativeGPs[0])
                constraints.extend(globalConstraints)
                alternativeGPInst = BasicGraphPattern(
                    [t for t in unRollTripleItems(triples, prolog)])
                for constr in constraints:
                    alternativeGPInst.addConstraint(
                        createSPARQLPConstraint(constr, prolog))
                basicGraphPatterns.append(alternativeGPInst)
    elif graphGraphPatterns:
        # A single GRAPH graph pattern.
        triples, constraints = reorderBasicGraphPattern(
            graphGraphPatterns[0].nonTripleGraphPattern[0])
        for t in unRollTripleItems(triples, prolog):
            patternList.append(t)
        basicGraphPattern = BasicGraphPattern(patternList)
        for constr in constraints:
            basicGraphPattern.addConstraint(createSPARQLPConstraint(constr, prolog))
        basicGraphPatterns.append(basicGraphPattern)
    else:
        # Plain group graph pattern.
        triples, constraints = reorderBasicGraphPattern(groupGraphPattern[0])
        for t in unRollTripleItems(triples, prolog):
            patternList.append(t)
        basicGraphPattern = BasicGraphPattern(patternList)
        for constr in constraints:
            basicGraphPattern.addConstraint(createSPARQLPConstraint(constr, prolog))
        basicGraphPatterns.append(basicGraphPattern)
    # NOTE(review): the OPTIONAL loop body was empty in the decompiled
    # bytecode -- OPTIONAL patterns are collected but not translated here.
    rtOptionalGraphPatterns = []
    for g in optionalGraphPatterns:
        pass
    return (basicGraphPatterns, rtOptionalGraphPatterns)
-
-
def isTriplePattern(nestedTriples):
    '''
    Determines (recursively) if the BasicGraphPattern contains any Triple
    Patterns, returning a boolean flag indicating if it does or not.

    (An unreachable duplicate `elif isinstance(..., ParsedConstrainedTriples)`
    branch -- dead decompiler residue -- was removed.)
    '''
    if isinstance(nestedTriples, list):
        # A list contains a triple pattern iff any member does.
        for member in nestedTriples:
            if isTriplePattern(member):
                return True
        return False
    elif isinstance(nestedTriples, ParsedConstrainedTriples):
        # Constrained triples delegate to their contained triples (if any).
        if nestedTriples.triples:
            return isTriplePattern(nestedTriples.triples)
        return False
    else:
        # Anything else counts as a triple pattern itself.
        return True
-
-
def categorizeGroupGraphPattern(gGP):
    '''
    Breaks down a ParsedGroupGraphPattern into mutually exclusive sets of
    ParsedGraphGraphPattern, ParsedOptionalGraphPattern, and
    ParsedAlternativeGraphPattern units.

    NOTE(review): the three list comprehensions were lost by the decompiler
    (only the `_[1].._[3]` placeholders survived); they are reconstructed by
    bucketing child patterns on the type of their nonTripleGraphPattern --
    confirm against a clean copy of the rdflib 2.x sparql-p processor.
    '''
    assert isinstance(gGP, ParsedGroupGraphPattern), \
        '%s is not a ParsedGroupGraphPattern' % gGP
    graphGraphPatterns = [gP for gP in gGP
                          if isinstance(gP.nonTripleGraphPattern,
                                        ParsedGraphGraphPattern)]
    optionalGraphPatterns = [gP for gP in gGP
                             if isinstance(gP.nonTripleGraphPattern,
                                           ParsedOptionalGraphPattern)]
    alternativeGraphPatterns = [gP for gP in gGP
                                if isinstance(gP.nonTripleGraphPattern,
                                              ParsedAlternativeGraphPattern)]
    return (graphGraphPatterns, optionalGraphPatterns, alternativeGraphPatterns)
-
-
def validateGroupGraphPattern(gGP, noNesting=False):
    '''
    Verifies (recursively) that the Group Graph Pattern is supported,
    raising NotImplemented for the unsupported combinations.

    (Dead expression statements left after each `raise` by the decompiler
    were removed; the trailing `continue`/`[]` residue indicates the last
    two checks were an if/elif chain, restored here.)
    '''
    firstGP = gGP[0]
    graphGraphPatternNo, optionalGraphPatternNo, alternativeGraphPatternNo = [
        len(gGPKlass) for gGPKlass in categorizeGroupGraphPattern(gGP)]
    if (firstGP.triples and isTriplePattern(firstGP.triples)
            and isinstance(firstGP.nonTripleGraphPattern,
                           ParsedAlternativeGraphPattern)):
        # Triple patterns combined directly with UNION are unsupported.
        raise NotImplemented(UNION_GRAPH_PATTERN_NOT_SUPPORTED, '%s' % firstGP)
    # `graphGraphPatternNo > 1 or graphGraphPatternNo` reduces to a truthiness
    # test on graphGraphPatternNo.
    if graphGraphPatternNo and alternativeGraphPatternNo:
        raise NotImplemented(GRAPH_GRAPH_PATTERN_NOT_SUPPORTED, '%s' % gGP)
    for gP in gGP:
        if noNesting and isinstance(
                gP.nonTripleGraphPattern,
                (ParsedOptionalGraphPattern, ParsedGraphGraphPattern,
                 ParsedAlternativeGraphPattern)):
            # No further nesting allowed below the top level.
            raise NotImplemented(GROUP_GRAPH_PATTERN_NESTING_NOT_SUPPORTED,
                                 '%s' % gGP)
        if isinstance(gP.nonTripleGraphPattern, ParsedAlternativeGraphPattern):
            # Validate each UNION branch, forbidding deeper nesting.
            for _gGP in gP.nonTripleGraphPattern:
                validateGroupGraphPattern(_gGP, noNesting=True)
        elif gP.nonTripleGraphPattern:
            validateGroupGraphPattern(gP.nonTripleGraphPattern, noNesting=True)
-
-
def Evaluate(graph, query, passedBindings={}, DEBUG=False):
    '''
    Takes:
    1. a rdflib.Graph.Graph instance
    2. a SPARQL query instance (parsed using the BisonGen parser)
    3. A dictionary of initial variable bindings (varName -> .. rdflib Term ..)
    4. DEBUG Flag

    Returns a list of tuples - each a binding of the selected variables in
    query order.

    NOTE: the mutable default for passedBindings is kept for interface
    compatibility; it is only read here, never mutated.
    '''
    if query.prolog:
        query.prolog.DEBUG = DEBUG
    if query.query.dataSets:
        # FROM / FROM NAMED data sets: query a read-only aggregate of them.
        graphs = []
        for dtSet in query.query.dataSets:
            if isinstance(dtSet, NamedGraph):
                graphs.append(Graph(graph.store, dtSet))
            else:
                memStore = plugin.get('IOMemory', Store)()
                memGraph = Graph(memStore)
                try:
                    memGraph.parse(dtSet, format='n3')
                except:
                    # Best-effort: fall back to the default parser.
                    memGraph.parse(dtSet)
                graphs.append(memGraph)
        tripleStore = sparqlGraph.SPARQLGraph(ReadOnlyGraphAggregate(graphs))
    else:
        tripleStore = sparqlGraph.SPARQLGraph(graph)
    graphGraphPatterns = categorizeGroupGraphPattern(
        query.query.whereClause.parsedGraphPattern)[0]
    gp = reorderGroupGraphPattern(query.query.whereClause.parsedGraphPattern)
    validateGroupGraphPattern(gp)
    basicPatterns, optionalPatterns = sparqlPSetup(gp, query.prolog)
    result = queryObject(tripleStore, basicPatterns, optionalPatterns,
                         passedBindings)
    if result is None:
        msg = 'Errors in the patterns, no valid query object generated; '
        msg += 'pattern:\n%s\netc...' % basicPatterns[0]
        raise SPARQLError(msg)
    if isinstance(query.query, AskQuery):
        return result.ask()
    elif isinstance(query.query, SelectQuery):
        # ORDER BY is not handled by this (decompiled) version.
        orderBy = None
        orderAsc = None
        # LIMIT/OFFSET ternaries were mangled by the decompiler; restored as
        # the 'clause and int(clause) or default' idiom.
        limit = (query.query.solutionModifier.limitClause and
                 int(query.query.solutionModifier.limitClause) or None)
        offset = (query.query.solutionModifier.offsetClause and
                  int(query.query.solutionModifier.offsetClause) or 0)
        return (result.select(query.query.variables, query.query.distinct,
                              limit, orderBy, orderAsc, offset),
                _variablesToArray(query.query.variables, 'selection'),
                result._getAllVariables(), orderBy, query.query.distinct)
    else:
        raise NotImplemented(CONSTRUCT_NOT_SUPPORTED, repr(query))
-
# Error codes carried by NotImplemented exceptions.
OPTIONALS_NOT_SUPPORTED = 1
UNION_GRAPH_PATTERN_NOT_SUPPORTED = 3
GRAPH_GRAPH_PATTERN_NOT_SUPPORTED = 4
GROUP_GRAPH_PATTERN_NESTING_NOT_SUPPORTED = 5
CONSTRUCT_NOT_SUPPORTED = 6

# Human-readable message for each error code (consumed by
# NotImplemented.__str__).
ExceptionMessages = {
    OPTIONALS_NOT_SUPPORTED:
        'Nested OPTIONAL not currently supported',
    UNION_GRAPH_PATTERN_NOT_SUPPORTED:
        'UNION Graph Pattern (currently) can only be combined with OPTIONAL Graph Patterns',
    GRAPH_GRAPH_PATTERN_NOT_SUPPORTED:
        'Graph Graph Pattern (currently) cannot only be used once by themselves or with OPTIONAL Graph Patterns',
    GROUP_GRAPH_PATTERN_NESTING_NOT_SUPPORTED:
        'Nesting of Group Graph Pattern (currently) not supported',
    CONSTRUCT_NOT_SUPPORTED:
        '"Construct" is not (currently) supported',
}
-
class NotImplemented(Exception):
    '''
    Raised for SPARQL constructs this processor does not (yet) support.
    `code` is one of the *_NOT_SUPPORTED constants; `msg` carries the
    offending pattern/query text.

    NOTE(review): the class name shadows the `NotImplemented` builtin; kept
    unchanged for backward compatibility with existing handlers.
    '''

    def __init__(self, code, msg):
        # Forward to Exception so .args, repr() and pickling behave sanely
        # (the original never initialized the base class).
        Exception.__init__(self, code, msg)
        self.code = code  # key into ExceptionMessages
        self.msg = msg    # contextual detail appended by __str__

    def __str__(self):
        return ExceptionMessages[self.code] + ' :' + self.msg
-
-
-